@Article{RamosCasaMaca:2020:InHiAp,
  author        = {Ramos, Ant{\^o}nio M{\'a}rio de Torres and Casagrande, Helder
                   Luciani and Macau, Elbert Einstein Nehrer},
  affiliation   = {{Ita{\'u} Asset Management} and {Instituto Nacional de Pesquisas
                   Espaciais (INPE)} and {Instituto Nacional de Pesquisas Espaciais
                   (INPE)}},
  title         = {Investigation on the high-order approximation of the entropy
                   bias},
  journal       = {Physica A: Statistical Mechanics and its Applications},
  year          = {2020},
  volume        = {549},
  pages         = {124301},
  month         = jul,
  keywords      = {Entropy bias, Mutual information, Multiple comparison analysis,
                   Complex network},
  abstract      = {The estimation of entropy from experimental data has a
                   considerable bias when the discretization of the variable domain
                   is comparable to the sample size. In this case, the source of the
                   bias is the difference between the a priori distribution and the
                   observed distribution from sampled data. In this paper, we
                   estimate the entropy bias considering an infinite sum of central
                   moments of the binomial distribution using two probability mass
                   functions. We analyze the bias in the light of the ratio between
                   the number of the partition of the domain and the sample size. The
                   main motivation of this study is improving statistical hypothesis
                   testing in which probabilities are conceived beforehand. We
                   examine the adequacy of high-order approximation according to the
                   ratio between the sample size and the number of domain partitions.
                   Finally, we expand the analysis to the entropy-derived mutual
                   information and present an application for network
                   reconstruction.},
  doi           = {10.1016/j.physa.2020.124301},
  url           = {http://dx.doi.org/10.1016/j.physa.2020.124301},
  issn          = {0378-4371},
  language      = {en},
  targetfile    = {ramos_investigation.pdf},
  urlaccessdate = {27 abr. 2024}
}